In [1]:
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf

Using activation functions

You can use the activation functions that TensorFlow already provides. They live in the tf.nn module!


In [2]:
# Create a session to evaluate the ops below (TensorFlow 1.x style)
sess = tf.Session()

In [3]:
# ReLU
print(sess.run(tf.nn.relu([-3., 3., 10.])))


[  0.   3.  10.]
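
ReLU simply clips negative inputs to zero: relu(x) = max(0, x). As a quick sanity check, here is a hand-rolled NumPy equivalent (a sketch, just for comparison):

In [ ]:
# ReLU by hand: elementwise max(0, x)
print(np.maximum(0., [-3., 3., 10.]))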

In [4]:
# ReLU6
# Like ReLU, but the output is capped at 6
print(sess.run(tf.nn.relu6([-3., 3., 10.])))


[ 0.  3.  6.]
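
ReLU6 additionally caps the activation at 6, i.e. relu6(x) = min(max(0, x), 6). One way to double-check this is a plain NumPy clip (a sketch for comparison):

In [ ]:
# ReLU6 by hand: clip the input to the range [0, 6]
print(np.clip([-3., 3., 10.], 0., 6.))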

In [5]:
# Sigmoid
print(sess.run(tf.nn.sigmoid([-1., 0., 1.])))


[ 0.26894143  0.5         0.7310586 ]
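
The sigmoid squashes any input into (0, 1): sigmoid(x) = 1 / (1 + e^(-x)). A quick hand-rolled NumPy check (a sketch):

In [ ]:
# Sigmoid by hand: 1 / (1 + exp(-x))
x = np.array([-1., 0., 1.])
print(1. / (1. + np.exp(-x)))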

In [6]:
# Hyperbolic tangent
print(sess.run(tf.nn.tanh([-1., 0., 1.])))


[-0.76159418  0.          0.76159418]
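
tanh behaves like a zero-centered sigmoid, squashing inputs into (-1, 1). NumPy ships the same function, so the result is easy to cross-check:

In [ ]:
# Hyperbolic tangent via NumPy for comparison
print(np.tanh([-1., 0., 1.]))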

In [7]:
# Softsign function
print(sess.run(tf.nn.softsign([-1., 0., 1.])))


[-0.5  0.   0.5]
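
Softsign is a cheaper, polynomially saturating cousin of tanh: softsign(x) = x / (1 + |x|). A hand-rolled NumPy check (a sketch):

In [ ]:
# Softsign by hand: x / (1 + |x|)
x = np.array([-1., 0., 1.])
print(x / (1. + np.abs(x)))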

In [8]:
# Softplus function
print(sess.run(tf.nn.softplus([-1., 0., 1.])))


[ 0.31326166  0.69314718  1.31326163]
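
Softplus is a smooth approximation of ReLU: softplus(x) = ln(1 + e^x). Note that softplus(0) = ln 2 ≈ 0.6931, matching the middle value above. A NumPy sketch:

In [ ]:
# Softplus by hand: log(1 + exp(x))
x = np.array([-1., 0., 1.])
print(np.log(1. + np.exp(x)))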

In [9]:
# ELU (Exponential Linear Unit)
print(sess.run(tf.nn.elu([-1., 0., 1.])))


[-0.63212055  0.          1.        ]
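
ELU is the identity for positive inputs and decays smoothly toward -1 for negative ones: elu(x) = x for x > 0, and e^x - 1 otherwise (so elu(-1) = e^(-1) - 1 ≈ -0.632, as above). A NumPy sketch:

In [ ]:
# ELU by hand: x for x > 0, exp(x) - 1 otherwise
x = np.array([-1., 0., 1.])
print(np.where(x > 0., x, np.exp(x) - 1.))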

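To see how all of these compare, the snippet below (a minimal sketch) evaluates each tf.nn activation over a common range with the session from above and plots the curves with matplotlib:

In [ ]:
# Plot every activation above on a common range for visual comparison
x_vals = np.linspace(-4., 4., 200).astype(np.float32)
activations = [('relu', tf.nn.relu), ('relu6', tf.nn.relu6),
               ('sigmoid', tf.nn.sigmoid), ('tanh', tf.nn.tanh),
               ('softsign', tf.nn.softsign), ('softplus', tf.nn.softplus),
               ('elu', tf.nn.elu)]
for name, fn in activations:
    plt.plot(x_vals, sess.run(fn(x_vals)), label=name)
plt.legend(loc='upper left')
plt.title('Activation functions in tf.nn')
plt.show()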